# home *** CD-ROM | disk | FTP | other *** search  -- web-page navigation residue from the scrape; not code
# Source Generated with Decompyle++
# File: in.pyc (Python 2.6)

'''Parser driver.

This provides a high-level interface to parse a file into a syntax tree.

'''

__author__ = 'Guido van Rossum <guido@python.org>'

__all__ = ['Driver', 'load_grammar']
import logging
import os
import sys

# Decompiler emitted the bare "from import ..." form of this relative import;
# restored as a package-relative import of the pgen2 sibling modules.
from . import grammar, parse, token, tokenize, pgen
-
class Driver(object):
    """Parser driver: glue between the tokenizer and the parse engine.

    Feeds a token stream to a ``parse.Parser`` built from *grammar* and
    returns the finished syntax tree (the parser's ``rootnode``).
    """

    def __init__(self, grammar, convert=None, logger=None):
        """Store the grammar, an optional node-conversion callback and a logger.

        If *logger* is None, the root logger is used.
        """
        self.grammar = grammar
        if logger is None:
            logger = logging.getLogger()
        self.logger = logger
        self.convert = convert

    def parse_tokens(self, tokens, debug=False):
        """Parse a series of tokens and return the syntax tree.

        *tokens* is an iterable of 5-tuples as produced by
        ``tokenize.generate_tokens``.  Raises ``parse.ParseError`` if the
        token stream ends before the grammar's start symbol is complete.
        """
        p = parse.Parser(self.grammar, self.convert)
        p.setup()
        lineno = 1
        column = 0
        # Keep the last-seen token around so the EOF error below can report it.
        type = value = start = end = line_text = None
        prefix = ''
        for quintuple in tokens:
            type, value, start, end, line_text = quintuple
            if start != (lineno, column):
                # There is a gap before this token (whitespace, line
                # continuations); accumulate the skipped text into `prefix`
                # so it travels with the next significant token.
                # (Decompiled `raise AssertionError, ...` restored to assert.)
                assert (lineno, column) <= start, ((lineno, column), start)
                s_lineno, s_column = start
                if lineno < s_lineno:
                    prefix += '\n' * (s_lineno - lineno)
                    lineno = s_lineno
                    column = 0
                if column < s_column:
                    prefix += line_text[column:s_column]
                    column = s_column
            if type in (tokenize.COMMENT, tokenize.NL):
                # Comments and non-logical newlines are not parsed; they
                # become part of the prefix of the following token.
                prefix += value
                lineno, column = end
                if value.endswith('\n'):
                    lineno += 1
                    column = 0
                continue
            if type == token.OP:
                # Narrow the generic OP token to the specific grammar symbol.
                type = grammar.opmap[value]
            if debug:
                self.logger.debug('%s %r (prefix=%r)',
                                  token.tok_name[type], value, prefix)
            if p.addtoken(type, value, (prefix, start)):
                # The parser has accepted a complete input.
                if debug:
                    self.logger.debug('Stop.')
                break
            prefix = ''
            lineno, column = end
            if value.endswith('\n'):
                lineno += 1
                column = 0
        else:
            # The loop ran to exhaustion without the parser accepting:
            # EOF arrived before the input was complete.
            raise parse.ParseError('incomplete input',
                                   type, value, (prefix, start))
        # Decompiler corrupted this to `None.rootnode`; the tree lives on
        # the parser instance.
        return p.rootnode

    def parse_stream_raw(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        tokens = tokenize.generate_tokens(stream.readline)
        return self.parse_tokens(tokens, debug)

    def parse_stream(self, stream, debug=False):
        """Parse a stream and return the syntax tree."""
        return self.parse_stream_raw(stream, debug)

    def parse_file(self, filename, debug=False):
        """Parse a file and return the syntax tree."""
        # `with` closes the stream even if parsing raises, replacing the
        # original try/finally.
        with open(filename) as stream:
            return self.parse_stream(stream, debug)

    def parse_string(self, text, debug=False):
        """Parse a string and return the syntax tree."""
        # `.next` (Python 2) restored as `.__next__` for Python 3.
        tokens = tokenize.generate_tokens(generate_lines(text).__next__)
        return self.parse_tokens(tokens, debug)
-
-
-
- def generate_lines(text):
- '''Generator that behaves like readline without using StringIO.'''
- for line in text.splitlines(True):
- yield line
-
- while True:
- yield ''
-
-
def load_grammar(gt='Grammar.txt', gp=None, save=True, force=False, logger=None):
    """Load the grammar (maybe from a pickle).

    *gt* is the grammar text file; *gp* the pickle path (derived from *gt*
    and the interpreter version when None).  Regenerates the tables from
    *gt* when *force* is true or the pickle is older than the text file,
    optionally saving them back to *gp*.  Returns a grammar object.
    """
    if logger is None:
        logger = logging.getLogger()
    if gp is None:
        head, tail = os.path.splitext(gt)
        if tail == '.txt':
            tail = ''
        # Version-tag the pickle so different interpreters don't collide.
        gp = head + tail + '.'.join(map(str, sys.version_info)) + '.pickle'
    if force or not _newer(gp, gt):
        logger.info('Generating grammar tables from %s', gt)
        g = pgen.generate_grammar(gt)
        if save:
            logger.info('Writing grammar tables to %s', gp)
            # Failure to cache the pickle is non-fatal: we already have the
            # tables in memory.  (Decompiled `except IOError: e = None` was
            # a corrupted `except IOError, e:` that would have logged None.)
            try:
                g.dump(gp)
            except IOError as e:
                logger.info('Writing failed:' + str(e))
    else:
        g = grammar.Grammar()
        g.load(gp)
    return g
-
-
- def _newer(a, b):
- '''Inquire whether file a was written since file b.'''
- if not os.path.exists(a):
- return False
- if not os.path.exists(b):
- return True
- return os.path.getmtime(a) >= os.path.getmtime(b)
-
-